//! Proximity allocator (retour — alloc/proximity.rs).

1use std::ops::Range;
2use std::slice;
3
4use slice_pool::sync::{SliceBox, SlicePool};
5
6use super::search as region_search;
7use crate::error::{Error, Result};
8
/// Defines the allocation type.
///
/// A `SliceBox` is an RAII handle — presumably it returns its slice to the
/// owning `SlicePool` when dropped (see `slice_pool` docs; confirm).
pub type Allocation = SliceBox<u8>;
11
/// Shared instance containing all pools
pub struct ProximityAllocator {
  /// Maximum distance (in bytes) an allocation may lie from its origin.
  pub max_distance: usize,
  /// All currently mapped memory pools; each was created within
  /// `max_distance` of some allocation origin.
  pub pools: Vec<SlicePool<u8>>,
}
17
18impl ProximityAllocator {
19  /// Allocates a slice in an eligible memory map.
20  pub fn allocate(&mut self, origin: *const (), size: usize) -> Result<Allocation> {
21    let memory_range = ((origin as usize).saturating_sub(self.max_distance))
22      ..((origin as usize).saturating_add(self.max_distance));
23
24    // Check if an existing pool can handle the allocation request
25    self.allocate_memory(&memory_range, size).or_else(|_| {
26      // ... otherwise allocate a pool within the memory range
27      self.allocate_pool(&memory_range, origin, size).map(|pool| {
28        // Use the newly allocated pool for the request
29        let allocation = pool.alloc(size).unwrap();
30        self.pools.push(pool);
31        allocation
32      })
33    })
34  }
35
36  /// Releases the memory pool associated with an allocation.
37  pub fn release(&mut self, value: &Allocation) {
38    // Find the associated memory pool
39    let index = self
40      .pools
41      .iter()
42      .position(|pool| {
43        let lower = pool.as_ptr() as usize;
44        let upper = lower + pool.len();
45
46        // Determine if this is the associated memory pool
47        (lower..upper).contains(&(value.as_ptr() as usize))
48      })
49      .expect("retrieving associated memory pool");
50
51    // Release the pool if the associated allocation is unique
52    if self.pools[index].len() == 1 {
53      self.pools.remove(index);
54    }
55  }
56
57  /// Allocates a chunk using any of the existing pools.
58  fn allocate_memory(&mut self, range: &Range<usize>, size: usize) -> Result<Allocation> {
59    // Returns true if the pool's memory is within the range
60    let is_pool_in_range = |pool: &SlicePool<u8>| {
61      let lower = pool.as_ptr() as usize;
62      let upper = lower + pool.len();
63      range.contains(&lower) && range.contains(&(upper - 1))
64    };
65
66    // Tries to allocate a slice within any eligible pool
67    self
68      .pools
69      .iter_mut()
70      .filter_map(|pool| {
71        if is_pool_in_range(pool) {
72          pool.alloc(size)
73        } else {
74          None
75        }
76      })
77      .next()
78      .ok_or(Error::OutOfMemory)
79  }
80
81  /// Allocates a new pool close to `origin`.
82  fn allocate_pool(
83    &mut self,
84    range: &Range<usize>,
85    origin: *const (),
86    size: usize,
87  ) -> Result<SlicePool<u8>> {
88    let before = region_search::before(origin, Some(range.clone()));
89    let after = region_search::after(origin, Some(range.clone()));
90
91    // TODO: Part of the pool can be out of range
92    // Try to allocate after the specified address first (mostly because
93    // macOS cannot allocate memory before the process's address).
94    after
95      .chain(before)
96      .filter_map(|result| match result {
97        Ok(address) => Self::allocate_fixed_pool(address, size).map(Ok),
98        Err(error) => Some(Err(error)),
99      })
100      .next()
101      .unwrap_or(Err(Error::OutOfMemory))
102  }
103
104  /// Tries to allocate fixed memory at the specified address.
105  fn allocate_fixed_pool(address: *const (), size: usize) -> Option<SlicePool<u8>> {
106    // Try to allocate memory at the specified address
107    mmap::MemoryMap::new(
108      size,
109      &[
110        mmap::MapOption::MapReadable,
111        mmap::MapOption::MapWritable,
112        mmap::MapOption::MapExecutable,
113        mmap::MapOption::MapAddr(address as *const _),
114      ],
115    )
116    .ok()
117    .map(SliceableMemoryMap)
118    .map(SlicePool::new)
119  }
120}
121
// TODO: Use memmap-rs instead
/// A wrapper for making a memory map compatible with `SlicePool`.
///
/// `mmap::MemoryMap` does not expose its memory as a slice, so this newtype
/// supplies the `AsRef<[u8]>`/`AsMut<[u8]>` views the pool works with.
struct SliceableMemoryMap(mmap::MemoryMap);
125
impl SliceableMemoryMap {
  /// Returns the mapping's memory as a byte slice.
  pub fn as_slice(&self) -> &[u8] {
    // SAFETY: `data()` points to the start of a live mapping of `len()`
    // bytes, which remains valid for the duration of the borrow of `self`.
    unsafe { slice::from_raw_parts(self.0.data(), self.0.len()) }
  }

  /// Returns the mapping's memory as a mutable byte slice.
  pub fn as_mut_slice(&mut self) -> &mut [u8] {
    // SAFETY: same bounds as `as_slice`; `&mut self` guarantees exclusivity.
    unsafe { slice::from_raw_parts_mut(self.0.data(), self.0.len()) }
  }
}
135
// Presumably these are the bounds `SlicePool::new` requires of its backing
// storage — TODO confirm against the `slice_pool` API.
impl AsRef<[u8]> for SliceableMemoryMap {
  fn as_ref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl AsMut<[u8]> for SliceableMemoryMap {
  fn as_mut(&mut self) -> &mut [u8] {
    self.as_mut_slice()
  }
}
147
// SAFETY: `mmap::MemoryMap` is only missing these auto traits because it
// stores raw pointers; the mapping itself is owned process memory.
// NOTE(review): assumes `mmap::MemoryMap` holds no thread-affine state —
// confirm before relying on cross-thread use.
unsafe impl Send for SliceableMemoryMap {}
unsafe impl Sync for SliceableMemoryMap {}